Clean up code around the crawling of a directory
authorAlex Crichton <alex@alexcrichton.com>
Fri, 21 Nov 2014 20:31:00 +0000 (12:31 -0800)
committerAlex Crichton <alex@alexcrichton.com>
Tue, 25 Nov 2014 02:58:39 +0000 (18:58 -0800)
The logic for "not recursing into `target`" was pretty hokey and needed
replacement. This commit also unifies the paths a bit to ensure that the main
loop is the same part that adds the root package itself.

This reorganization ends up closing #937

src/cargo/core/package.rs
src/cargo/ops/cargo_read_manifest.rs
src/cargo/ops/resolve.rs
src/cargo/sources/path.rs
src/rustversion.txt
tests/support/mod.rs
tests/test_cargo_compile_custom_build.rs
tests/test_cargo_package.rs

index cdb57c6bf3d5a6e7b4d044cc53cc7b7dfbc404ee..594f685cb820d16dd8fbdf2049bc6c9eb5d8c295 100644 (file)
@@ -1,4 +1,5 @@
 use std::fmt::{mod, Show, Formatter};
+use std::hash;
 use std::slice;
 use semver::Version;
 
@@ -128,6 +129,14 @@ impl PartialEq for Package {
     }
 }
 
+impl Eq for Package {}
+
+impl hash::Hash for Package {
+    fn hash(&self, into: &mut hash::sip::SipState) {
+        self.get_package_id().hash(into)
+    }
+}
+
 #[deriving(PartialEq,Clone,Show)]
 pub struct PackageSet {
     packages: Vec<Package>,
index 955c15bcc413368fb225f4189f31c458faffa6e4..5d3a6f32d8cd20b5d0ee495727aa03540bdbe8bf 100644 (file)
@@ -27,20 +27,29 @@ pub fn read_package(path: &Path, source_id: &SourceId)
 
 pub fn read_packages(path: &Path,
                      source_id: &SourceId) -> CargoResult<Vec<Package>> {
-    let mut all_packages = Vec::new();
+    let mut all_packages = HashSet::new();
     let mut visited = HashSet::<Path>::new();
 
     log!(5, "looking for root package: {}, source_id={}", path.display(), source_id);
-    try!(process_possible_package(path, &mut all_packages, source_id, &mut visited));
 
-    try!(walk(path, true, |root, dir| {
+    try!(walk(path, |dir| {
         log!(5, "looking for child package: {}", dir.display());
-        if root && dir.join("target").is_dir() { return Ok(false); }
-        if root { return Ok(true) }
+
+        // Don't recurse into git databases
         if dir.filename_str() == Some(".git") { return Ok(false); }
-        if dir.join(".git").exists() { return Ok(false); }
-        try!(process_possible_package(dir, &mut all_packages, source_id,
+
+        // Don't automatically discover packages across git submodules
+        if dir != path && dir.join(".git").exists() { return Ok(false); }
+
+        // Don't ever look at target directories
+        if dir.filename_str() == Some("target") && has_manifest(&dir.dir_path()) {
+            return Ok(false)
+        }
+
+        if has_manifest(dir) {
+            try!(read_nested_packages(dir, &mut all_packages, source_id,
                                       &mut visited));
+        }
         Ok(true)
     }));
 
@@ -48,14 +57,14 @@ pub fn read_packages(path: &Path,
         Err(human(format!("Could not find Cargo.toml in `{}`", path.display())))
     } else {
         log!(5, "all packages: {}", all_packages);
-        Ok(all_packages)
+        Ok(all_packages.into_iter().collect())
     }
 }
 
-fn walk(path: &Path, is_root: bool,
-        callback: |bool, &Path| -> CargoResult<bool>) -> CargoResult<()> {
+fn walk(path: &Path,
+        callback: |&Path| -> CargoResult<bool>) -> CargoResult<()> {
     if path.is_dir() {
-        let continues = try!(callback(is_root, path));
+        let continues = try!(callback(path));
         if !continues {
             log!(5, "not processing {}", path.display());
             return Ok(());
@@ -69,56 +78,36 @@ fn walk(path: &Path, is_root: bool,
             Err(e) => return Err(FromError::from_error(e)),
         };
         for dir in dirs.iter() {
-            try!(walk(dir, false, |a, x| callback(a, x)))
+            try!(walk(dir, |x| callback(x)))
         }
     }
 
     Ok(())
 }
 
-fn process_possible_package(dir: &Path,
-                            all_packages: &mut Vec<Package>,
-                            source_id: &SourceId,
-                            visited: &mut HashSet<Path>) -> CargoResult<()> {
-
-    if !has_manifest(dir) { return Ok(()); }
-
-    let packages = try!(read_nested_packages(dir, source_id, visited));
-    push_all(all_packages, packages);
-
-    Ok(())
-}
-
 fn has_manifest(path: &Path) -> bool {
     find_project_manifest_exact(path, "Cargo.toml").is_ok()
 }
 
-fn read_nested_packages(path: &Path, source_id: &SourceId,
-                 visited: &mut HashSet<Path>) -> CargoResult<Vec<Package>> {
-    if !visited.insert(path.clone()) { return Ok(Vec::new()) }
+fn read_nested_packages(path: &Path,
+                        all_packages: &mut HashSet<Package>,
+                        source_id: &SourceId,
+                        visited: &mut HashSet<Path>) -> CargoResult<()> {
+    if !visited.insert(path.clone()) { return Ok(()) }
 
     let manifest = try!(find_project_manifest_exact(path, "Cargo.toml"));
 
     let (pkg, nested) = try!(read_package(&manifest, source_id));
-    let mut ret = vec![pkg];
+    all_packages.insert(pkg);
 
     // Registry sources are not allowed to have `path=` dependencies because
     // they're all translated to actual registry dependencies.
     if !source_id.is_registry() {
         for p in nested.iter() {
-            ret.extend(try!(read_nested_packages(&path.join(p),
-                                            source_id,
-                                            visited)).into_iter());
+            try!(read_nested_packages(&path.join(p), all_packages, source_id,
+                                      visited));
         }
     }
 
-    Ok(ret)
-}
-
-fn push_all(set: &mut Vec<Package>, packages: Vec<Package>) {
-    for package in packages.into_iter() {
-        if set.contains(&package) { continue; }
-
-        set.push(package)
-    }
+    Ok(())
 }
index 29947e69381062207e7b1c980a4294e0f95e49e5..e1bb3948dd31203eb0daf6c80793158c9aa80d41 100644 (file)
@@ -96,7 +96,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
                 (d.get_name(), d)
             }).collect::<HashMap<_, _>>();
             summary.map_dependencies(|d| {
-                match map.find_equiv(d.get_name()) {
+                match map.get(d.get_name()) {
                     Some(&lock) if d.matches_id(lock) => d.lock_to(lock),
                     _ => d,
                 }
index af670de059e9efb683c4823f246f131eaf01dacf..731e4835e5811f5921849fb4cb68c500dda85828 100644 (file)
@@ -45,7 +45,7 @@ impl PathSource {
             return Err(internal("source has not been updated"))
         }
 
-        match self.packages.as_slice().head() {
+        match self.packages.iter().find(|p| p.get_root() == self.path) {
             Some(pkg) => Ok(pkg.clone()),
             None => Err(internal("no package found in source"))
         }
index 9520ecf601db913824687a659a440eebe2d6d9dd..6bad8ad0d5e0b11d38c71ba0cf51870c8801e920 100644 (file)
@@ -1 +1 @@
-2014-11-22
+2014-11-24
index 3c0de8f55f8b3c3918ce024cb2676fe51d8741da..87a9caa0003c878ebd11b41df4158ad68a52e062 100644 (file)
@@ -517,3 +517,4 @@ pub static PACKAGING:   &'static str = "   Packaging";
 pub static DOWNLOADING: &'static str = " Downloading";
 pub static UPLOADING:   &'static str = "   Uploading";
 pub static VERIFYING:   &'static str = "   Verifying";
+pub static ARCHIVING:   &'static str = "   Archiving";
index 93771ce63072c8c5f310c2e8f1846eb8a010bcad..9ea71f0db15904476ad313e408c2d891f0ea1171 100644 (file)
@@ -241,8 +241,8 @@ test!(links_duplicates {
 native library `a` is being linked to by more than one package, and can only be \
 linked to by one package
 
-  foo v0.5.0 (file://[..])
-  a v0.5.0 (file://[..])
+  [..] v0.5.0 (file://[..])
+  [..] v0.5.0 (file://[..])
 "));
 })
 
index 434a39f02e56333f1ccac8715d530fa03a7c6ac0..57f2e64e98b1963524d88d52a07bd4b0e87dd1b8 100644 (file)
@@ -2,9 +2,10 @@ use std::io::{File, MemReader};
 
 use tar::Archive;
 use flate2::reader::GzDecoder;
+use cargo::util::process;
 
-use support::{project, execs, cargo_dir, ResultTest};
-use support::{PACKAGING, VERIFYING, COMPILING};
+use support::{project, execs, cargo_dir, ResultTest, paths, git};
+use support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING};
 use hamcrest::{assert_that, existing_file};
 
 fn setup() {
@@ -128,7 +129,41 @@ test!(metadata_warning {
         verifying = VERIFYING,
         compiling = COMPILING,
         dir = p.url()).as_slice()));
+})
 
+test!(package_verbose {
+    let root = paths::root().join("all");
+    let p = git::repo(&root)
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {}
+        "#)
+        .file("a/Cargo.toml", r#"
+            [project]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("a/src/lib.rs", "");
+    p.build();
+    let cargo = process(cargo_dir().join("cargo")).unwrap()
+                                                  .cwd(root)
+                                                  .env("HOME", Some(paths::home()));
+    assert_that(cargo.clone().arg("build"), execs().with_status(0));
+    assert_that(cargo.arg("package").arg("-v")
+                                                    .arg("--no-verify"),
+                execs().with_status(0).with_stdout(format!("\
+{packaging} foo v0.0.1 ([..])
+{archiving} [..]
+{archiving} [..]
+",
+        packaging = PACKAGING,
+        archiving = ARCHIVING).as_slice()));
 })
 
 test!(package_verification {